import json
import shap
import tensorflow as tf
import anndata
from matplotlib import pyplot as plt
import numpy as np
import joblib
from lime import lime_image
import cv2
import pandas as pd
import PIL
from PIL import Image
import random
import scipy as sp
from scipy import ndimage as ndi
import joblib
import sys
import skimage
import pickle
import copy
import random
import scanpy as sc
from sklearn.preprocessing import LabelEncoder, OneHotEncoder, MinMaxScaler
from skimage.transform import resize
from skimage.color import rgb2hed
from skimage.feature import peak_local_max
from skimage.measure import label, regionprops
from skimage.segmentation import mark_boundaries, watershed, mark_boundaries
from skimage.segmentation import slic
from skimage.morphology import area_opening
from tensorflow.keras.applications.resnet50 import ResNet50, preprocess_input
from tensorflow.keras.preprocessing import image as image_fun
import tensorflow as tf
import tensorflow.keras.layers as L
import tensorflow.keras.backend as K
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras import Model
from tensorflow.keras.applications.resnet50 import ResNet50, preprocess_input
from tensorflow.keras.preprocessing.image import load_img, img_to_array
from tensorflow.keras.preprocessing import image as image_fun
from tensorflow.keras.layers import Dense, GlobalAveragePooling2D, Input, Dropout, Lambda
import warnings
warnings.filterwarnings("ignore")
#np.set_printoptions(threshold=sys.maxsize)
def watershed_segment(image):
    """Segment nuclei in an H&E image tile with a distance-transform watershed.

    The RGB tile is converted to HED colour space; the haematoxylin channel
    is rescaled and thresholded (Otsu * 0.7) to get a foreground mask, which
    seeds a watershed at local maxima of its distance transform.  Objects
    smaller than 64 px are removed, and the surviving labels are remapped to
    a dense 0..K range.

    Parameters
    ----------
    image : array-like, shape (H, W, 3)
        RGB image tile.

    Returns
    -------
    ndarray of int, shape (H, W)
        Per-pixel nucleus labels (0 = background).
    """
    annotation_hed = rgb2hed(image)
    annotation_h = annotation_hed[:, :, 0]
    # NOTE(review): q=0.01 is the 0.01th percentile (near the minimum) —
    # confirm this is the intended normalisation reference.
    annotation_h *= 255.0 / np.percentile(annotation_h, q=0.01)
    thresh = skimage.filters.threshold_otsu(annotation_h) * 0.7
    # FIX: scipy.ndimage.morphology is deprecated (removed in SciPy >= 1.15);
    # binary_fill_holes lives directly under scipy.ndimage.
    im_fgnd_mask = sp.ndimage.binary_fill_holes(
        annotation_h < thresh
    )
    distance = ndi.distance_transform_edt(im_fgnd_mask)
    coords = peak_local_max(distance, footprint=np.ones((5, 5)), labels=im_fgnd_mask)
    mask = np.zeros(distance.shape, dtype=bool)
    mask[tuple(coords.T)] = True
    markers, _ = ndi.label(mask)
    labels = watershed(annotation_h, markers, mask=im_fgnd_mask)
    # FIX: np.int was removed in NumPy 1.24; the builtin int is the
    # documented drop-in replacement.
    im_nuclei_seg_mask = area_opening(labels, area_threshold=64).astype(int)
    # Remap arbitrary surviving label ids to a contiguous 0..K-1 range.
    map_dic = dict(zip(np.unique(im_nuclei_seg_mask),
                       np.arange(len(np.unique(im_nuclei_seg_mask)))))
    im_nuclei_seg_mask = np.vectorize(map_dic.get)(im_nuclei_seg_mask)
    return im_nuclei_seg_mask
def model_predict_gene(gene):
    """Build the image->probability function LIME needs for one gene.

    Looks the gene up in the module-level ``gene_list`` and returns a
    closure that feeds a batch of tiles through the module-level
    ``resnet_model`` feature extractor and the ``clf_resnet`` classifier,
    returning that gene's probability output.
    """
    gene_idx = gene_list.index(gene)

    def combine_model_predict(tile1):
        features = resnet_model.predict(tile1)
        probabilities = clf_resnet.predict_proba(features)
        return probabilities[gene_idx]

    return combine_model_predict
def LIME(image_path, gene):
    """Compute a LIME heat-map for each image tile for the given gene.

    Each tile is explained with the module-level ``explainer`` using
    ``watershed_segment`` as the segmentation function; the per-segment
    weights for label 1 are painted onto the segment map.

    Parameters
    ----------
    image_path : iterable of str
        Paths to the image tiles.
    gene : str
        Gene name; must appear in the module-level ``gene_list``.

    Returns
    -------
    list of ndarray
        One heat-map per successfully explained image.  Paths of images
        whose explanation raised are printed and skipped.
    """
    LIME_heatmaps = []
    no_segments = []
    for i in image_path:
        image = np.asarray(image_fun.load_img(i))
        try:
            explanation = explainer.explain_instance(
                image, model_predict_gene(gene), top_labels=2, hide_color=0,
                num_samples=100, segmentation_fn=watershed_segment)
            temp, mask = explanation.get_image_and_mask(
                1, positive_only=False, num_features=100, hide_rest=True)
            dict_heatmap = dict(explanation.local_exp[1])
            heatmap = np.vectorize(dict_heatmap.get)(explanation.segments)
        except Exception:
            # FIX: the original used a bare `except:` and then fell through
            # to the append below, appending the *previous* iteration's
            # heatmap (or raising NameError on a first-image failure).
            # Failed images are now recorded and skipped.
            no_segments.append(i)
            continue
        LIME_heatmaps.append(heatmap)
    print(no_segments)
    return LIME_heatmaps
def SHAP_global(image_path, gene):
    """Compute SHAP image explanations for each tile for the given gene.

    Explains the combined ResNet+classifier model with an
    "inpaint_telea" image masker over 299x299x3 tiles, keeping the two
    highest-ranked outputs.

    Parameters
    ----------
    image_path : iterable of str
        Paths to the image tiles.
    gene : str
        Gene name; must appear in the module-level ``gene_list``.

    Returns
    -------
    list of shap.Explanation
        One explanation object per image.
    """
    SHAP_mask = []
    # PERF: the masker and explainer depend only on the gene, not on the
    # image, so build them once instead of once per tile.
    masker = shap.maskers.Image("inpaint_telea", (299,299,3))
    explainer_shap = shap.Explainer(model_predict_gene(gene), masker,
                                    output_names=STimage_classification_classes)
    for i in image_path:
        shap_values = explainer_shap(
            np.array(plt.imread(i).astype('float32')).reshape(1,299,299,3),
            max_evals=50, batch_size=50,
            outputs=shap.Explanation.argsort.flip[:2])
        SHAP_mask.append(shap_values)
    return SHAP_mask
def mask_image(zs, segmentation, image, background=None):
    """Build one masked copy of ``image`` per row of the mask matrix ``zs``.

    For sample ``i``, every superpixel ``j`` whose entry ``zs[i, j]`` is 0
    is painted over with ``background`` (by default the per-channel mean of
    the image).  Serves as the perturbation function for Kernel SHAP.

    Returns a float array of shape (zs.shape[0], H, W, C).
    """
    if background is None:
        background = image.mean((0, 1))
    n_samples, n_segments = zs.shape
    out = np.zeros((n_samples, image.shape[0], image.shape[1], image.shape[2]))
    for row in range(n_samples):
        out[row] = image
        for seg in range(n_segments):
            if zs[row, seg] == 0:
                out[row][segmentation == seg, :] = background
    return out
def model_predict_gene_kernel(gene):
    """Build the Kernel-SHAP prediction function for one gene.

    The returned closure maps a binary superpixel on/off matrix ``z`` to
    masked images via ``mask_image`` (using the module-level
    ``segments_slic`` and ``image_orig``), extracts ResNet features, and
    returns the per-gene class probabilities.
    """
    gene_idx = gene_list.index(gene)

    def combine_model_predict(z):
        masked_batch = mask_image(z, segments_slic, image_orig, 0)
        features = resnet_model.predict(masked_batch)
        probabilities = clf_resnet.predict_proba(features)
        return probabilities[gene_idx]

    return combine_model_predict
def fill_segmentation(values, segmentation):
    """Project per-segment scores onto the pixel grid.

    Parameters
    ----------
    values : sequence of float
        One score per segment id, for ids ``0..len(values)-1``.
    segmentation : ndarray of int
        Per-pixel segment labels.

    Returns
    -------
    ndarray of float, same shape as ``segmentation``
        Each pixel carries its segment's score; labels outside
        ``range(len(values))`` remain 0.
    """
    out = np.zeros(segmentation.shape)
    # enumerate() replaces the original index-based range(len(values)) loop.
    for seg_id, score in enumerate(values):
        out[segmentation == seg_id] = score
    return out
def scale_mask(mat, N):
    """Globally min-max normalise ``mat`` to [0, 1] as N 299x299 masks.

    A single min/max is taken over the whole array (not per image), so
    relative magnitudes across the N masks are preserved.

    Parameters
    ----------
    mat : array-like
        Array with exactly N * 299 * 299 elements.
    N : int
        Number of masks.

    Returns
    -------
    ndarray of shape (N, 299, 299) with values in [0, 1].
    """
    mat = np.asarray(mat)
    # The original also computed `mat_std * (1 - 0) + 0`, a no-op that has
    # been removed; the flattening reshape before the min/max was likewise
    # redundant since min()/max() are global.
    mat_std = (mat - mat.min()) / (mat.max() - mat.min())
    return mat_std.reshape(N, 299, 299)
def SHAP_scale(ShapMask, N):
    """Reduce SHAP explanations to N scaled single-channel 299x299 masks.

    For each explanation, keeps the SHAP values of the output with the
    highest base value, stacks them as (N, 299, 299, 3), drops all but the
    first colour channel, and min-max scales the result via ``scale_mask``.
    """
    top_class_maps = [
        exp.values[:, :, :, :, np.argmax(exp.base_values)] for exp in ShapMask
    ]
    stacked = np.array(top_class_maps).reshape(N, 299, 299, 3)
    single_channel = np.array(
        [stacked[idx, :, :, :1] for idx in range(len(ShapMask))]
    )
    return scale_mask(single_channel, N)
def shap_segments_scores(shap_segments, shap_values):
    """Paint Kernel-SHAP segment scores back onto 299x299 pixel maps.

    For image ``j``, every pixel belonging to segment ``i`` receives the
    score ``shap_values[j][0][0][i]``.

    NOTE(review): the inner loop runs over ``range(segments.max())``, so
    the highest-numbered segment keeps a score of 0 — preserved exactly
    as in the original.
    """
    score_maps = []
    for j, segments in enumerate(shap_segments):
        score_map = np.zeros((299, 299))
        for seg_id in range(segments.max()):
            score_map[segments == seg_id] = shap_values[j][0][0][seg_id]
        score_maps.append(score_map)
    return score_maps
def LIME_coding_cell_type(FFPE_C3_LIMEMask_scaled_transformed):
    """Colour-code LIME masks for cell-type visualisation.

    Each greyscale mask is stacked with two constant-255 planes (G, B),
    multiplied by 255, and cast to uint8 (values wrap modulo 256).  Then
    near-black pixels are recoloured pure green, and the single most
    frequent remaining colour (assumed to be background) is recoloured
    white.

    Returns a list of (299, 299, 3) uint8 RGB arrays.
    """
    FFPE_C3vGNAS_LIMEMask_scaled_transformed = []
    # Constant 255 planes used as the G and B channels.
    a = np.array([[255]*299]*299)
    b = np.array([[255]*299]*299)
    for i in range(0,len(FFPE_C3_LIMEMask_scaled_transformed)):
        # NOTE(review): multiplying by 255 before the uint8 cast relies on
        # modulo-256 wrap-around for the constant planes — confirm this
        # overflow is intentional.
        rgb_uint8 = (np.dstack((FFPE_C3_LIMEMask_scaled_transformed[i],
                                a,b))*255).astype(np.uint8)
        # Recolour near-black pixels (all channels < 15) to pure green.
        black_pixels = np.where(
            (rgb_uint8[:, :, 0] < 15) &
            (rgb_uint8[:, :, 1] < 15) &
            (rgb_uint8[:, :, 2] < 15)
        )
        rgb_uint8[black_pixels] = [0, 255, 0]
        rgb_uint8_im = Image.fromarray(rgb_uint8)
        # getcolors() yields (count, colour) pairs; max() picks the most
        # frequent colour, treated as the background.
        rgb_uint8_im_info = max(rgb_uint8_im.getcolors(rgb_uint8_im.size[0]*rgb_uint8_im.size[1]))
        bcg_pixels = np.where(
            (rgb_uint8[:, :, 0] == rgb_uint8_im_info[1][0]) &
            (rgb_uint8[:, :, 1] == rgb_uint8_im_info[1][1]) &
            (rgb_uint8[:, :, 2] == rgb_uint8_im_info[1][2])
        )
        rgb_uint8[bcg_pixels] = [255, 255, 255]
        FFPE_C3vGNAS_LIMEMask_scaled_transformed.append(rgb_uint8)
    return FFPE_C3vGNAS_LIMEMask_scaled_transformed
def SHAP_Agnostic_coding_cell_type(shap_segments_scores_C3_agnostic_scaled_transformed):
    """Colour-code agnostic Kernel-SHAP score maps for cell-type display.

    Each map is first quantised in place: scores >= the mean of the
    non-zero values collapse to the pre-computed maximum, the remaining
    positive scores become 1, zeros become the array mean, and finally
    the maximum is zeroed.  The result is stacked into RGB, near-black
    pixels are recoloured green, and the most frequent colour (assumed
    background) is recoloured white.

    Returns a list of (299, 299, 3) uint8 RGB arrays.
    """
    shap_segments_scores_C3vGNAS_agnostic_scaled_transformed = []
    for i in range(0,len(shap_segments_scores_C3_agnostic_scaled_transformed)):
        temp_arr = shap_segments_scores_C3_agnostic_scaled_transformed[i].copy()
        # Largest non-zero score, used below as a temporary sentinel.
        first_max = temp_arr[np.nonzero(temp_arr)].max()
        temp_arr[temp_arr >= temp_arr[np.nonzero(temp_arr)].mean()] = first_max
        temp_arr[np.where(np.logical_and(temp_arr>0,temp_arr<temp_arr.max()))] = 1
        temp_arr[temp_arr == 0] = temp_arr.mean()
        temp_arr[temp_arr == first_max] = 0
        # Constant 255 planes as the G and B channels.
        a = np.array([[255]*299]*299)
        b = np.array([[255]*299]*299)
        # NOTE(review): *255 before the uint8 cast wraps modulo 256 for the
        # constant planes — confirm the overflow is intentional.
        rgb_uint8 = (np.dstack((temp_arr,a,b))*255).astype(np.uint8)
        # Near-black pixels (all channels < 15) -> pure green.
        black_pixels = np.where(
            (rgb_uint8[:, :, 0] < 15) &
            (rgb_uint8[:, :, 1] < 15) &
            (rgb_uint8[:, :, 2] < 15)
        )
        rgb_uint8[black_pixels] = [0, 255, 0]
        rgb_uint8_im = Image.fromarray(rgb_uint8)
        # Most frequent colour (via getcolors (count, colour) pairs) is
        # treated as the background.
        rgb_uint8_im_info = max(rgb_uint8_im.getcolors(rgb_uint8_im.size[0]*rgb_uint8_im.size[1]))
        bcg_pixels = np.where(
            (rgb_uint8[:, :, 0] == rgb_uint8_im_info[1][0]) &
            (rgb_uint8[:, :, 1] == rgb_uint8_im_info[1][1]) &
            (rgb_uint8[:, :, 2] == rgb_uint8_im_info[1][2])
        )
        rgb_uint8[bcg_pixels] = [255, 255, 255]
        shap_segments_scores_C3vGNAS_agnostic_scaled_transformed.append(rgb_uint8)
    return shap_segments_scores_C3vGNAS_agnostic_scaled_transformed
def SHAP_Global_coding_cell_type(FFPE_C3_ShapMask_scaled_transformed):
    """Colour-code global SHAP masks for cell-type visualisation.

    Same stacking/green-recolour scheme as the LIME variant, but WITHOUT
    the final most-frequent-colour -> white background step.

    Returns a list of (299, 299, 3) uint8 RGB arrays.
    """
    FFPE_C3vGNAS_ShapMask_scaled_transformed = []
    # Constant 255 planes used as the G and B channels.
    a = np.array([[255]*299]*299)
    b = np.array([[255]*299]*299)
    for i in range(0,len(FFPE_C3_ShapMask_scaled_transformed)):
        # NOTE(review): *255 before the uint8 cast wraps modulo 256 for the
        # constant planes — confirm the overflow is intentional.
        rgb_uint8 = (np.dstack((FFPE_C3_ShapMask_scaled_transformed[i],
                                a,b))*255).astype(np.uint8)
        # Near-black pixels (all channels < 15) -> pure green.
        black_pixels = np.where(
            (rgb_uint8[:, :, 0] < 15) &
            (rgb_uint8[:, :, 1] < 15) &
            (rgb_uint8[:, :, 2] < 15)
        )
        rgb_uint8[black_pixels] = [0, 255, 0]
        FFPE_C3vGNAS_ShapMask_scaled_transformed.append(rgb_uint8)
    return FFPE_C3vGNAS_ShapMask_scaled_transformed
# ---- Global configuration and model setup --------------------------------
Path = "/home/uqomulay/90days/STimage_outputs/"  # output root directory
STimage_classification_classes = ["High","Low"]  # output names passed to SHAP
explainer = lime_image.LimeImageExplainer()
# Pre-trained classifier loaded from disk; used on top of ResNet features.
clf_resnet = joblib.load(Path+'pickle/STimage_LR.pkl')
# ImageNet ResNet50 as a frozen feature extractor (no top, global avg pool).
resnet_model = ResNet50(weights="imagenet", include_top=False, input_shape=(299, 299, 3), pooling="avg")
# NOTE: the following is captured TensorFlow console output that was pasted
# into the source (this file is a notebook dump); commented out so the file
# parses as Python.
# 2022-12-06 22:01:49.990590: W tensorflow/compiler/xla/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /scratch/90days/uqomulay/.conda/envs/STimage_Classification/lib/python3.8/site-packages/cv2/../../lib64:/opt/ohpc/pub/mpi/openmpi3-gnu8/3.1.4/lib:/opt/ohpc/pub/compiler/gcc/8.3.0/lib64
# 2022-12-06 22:01:49.990636: W tensorflow/compiler/xla/stream_executor/cuda/cuda_driver.cc:265] failed call to cuInit: UNKNOWN ERROR (303)
# 2022-12-06 22:01:49.990667: I tensorflow/compiler/xla/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (delta078): /proc/driver/nvidia/version does not exist
# 2022-12-06 22:01:49.991071: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 FMA To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
# ---- Compute and persist LIME / SHAP masks (25 tiles per gene) -----------
# FIX: these statements were collapsed onto single lines (invalid Python,
# a side effect of the notebook export); reconstructed as valid code with
# the original statement order preserved.
# NOTE(review): `image_dir` and `gene_list` must already be defined in the
# session — the original notebook presumably defined them in an earlier cell.

FFPE_C3_LIMEMask = LIME(image_dir, gene_list[5])
FFPE_C3_LIMEMask_scaled = scale_mask(np.array(FFPE_C3_LIMEMask), 25)
np.save(Path+"SHAP_LIME/FFPE_C3_LIMEMask_scaled.npy", FFPE_C3_LIMEMask_scaled)

FFPE_C3_ShapMask = SHAP_global(image_dir, gene_list[5])
FFPE_C3_ShapMask_scaled = SHAP_scale(FFPE_C3_ShapMask, 25)
with open(Path+'SHAP_LIME/FFPE_C3_ShapMask_scaled', "wb") as fp:
    pickle.dump(FFPE_C3_ShapMask_scaled, fp)

FFPE_GNAS_LIMEMask = LIME(image_dir, gene_list[9])
FFPE_GNAS_LIMEMask_scaled = scale_mask(np.array(FFPE_GNAS_LIMEMask), 25)
np.save(Path+"SHAP_LIME/FFPE_GNAS_LIMEMask_scaled.npy", FFPE_GNAS_LIMEMask_scaled)

FFPE_GNAS_ShapMask = SHAP_global(image_dir, gene_list[9])
FFPE_GNAS_ShapMask_scaled = SHAP_scale(FFPE_GNAS_ShapMask, 25)
with open(Path+'SHAP_LIME/FFPE_GNAS_ShapMask_scaled', "wb") as fp:
    pickle.dump(FFPE_GNAS_ShapMask_scaled, fp)

# Model-agnostic Kernel SHAP over watershed superpixels, one image at a time.
# The loop body sets the module-level `image_orig` / `segments_slic` that
# model_predict_gene_kernel's closure reads.
shap_values_GNAS_agnostic = []
shap_segments_GNAS_agnostic = []
for i in range(0, len(image_dir)):
    image = Image.open(image_dir[i])
    image_orig = img_to_array(image)
    segments_slic = watershed_segment(image)
    shap_segments_GNAS_agnostic.append(segments_slic)
    explainer = shap.KernelExplainer(
        model_predict_gene_kernel("GNAS"),
        np.zeros((1, shap_segments_GNAS_agnostic[i].max())))
    shap_values_GNAS_agnostic.append(
        explainer.shap_values(np.ones((1, shap_segments_GNAS_agnostic[i].max())),
                              nsamples=shap_segments_GNAS_agnostic[i].max()))
with open(Path+'SHAP_LIME/shap_segments_GNAS_agnostic', "wb") as fp:
    pickle.dump(shap_segments_GNAS_agnostic, fp)
with open(Path+'SHAP_LIME/shap_values_GNAS_agnostic', "wb") as fp:
    pickle.dump(shap_values_GNAS_agnostic, fp)

shap_values_C3_agnostic = []
shap_segments_C3_agnostic = []
for i in range(0, len(image_dir)):
    image = Image.open(image_dir[i])
    image_orig = img_to_array(image)
    segments_slic = watershed_segment(image)
    shap_segments_C3_agnostic.append(segments_slic)
    explainer = shap.KernelExplainer(
        model_predict_gene_kernel("C3"),
        np.zeros((1, shap_segments_C3_agnostic[i].max())))
    shap_values_C3_agnostic.append(
        explainer.shap_values(np.ones((1, shap_segments_C3_agnostic[i].max())),
                              nsamples=shap_segments_C3_agnostic[i].max()))
with open(Path+'SHAP_LIME/shap_segments_C3_agnostic', "wb") as fp:
    pickle.dump(shap_segments_C3_agnostic, fp)
with open(Path+'SHAP_LIME/shap_values_C3_agnostic', "wb") as fp:
    pickle.dump(shap_values_C3_agnostic, fp)
#New Path
Path = "/home/uqomulay/90days/STimage_outputs/SHAP_LIME/"
#Old Path
#Old_Path = "/home/uqomulay/90days/STimage_outputs/SHAP_LIME_FFPE/"
# Reload the previously computed artefacts.  Files saved with np.save carry
# a .npy suffix; the others were written with pickle.dump — np.load with
# allow_pickle=True falls back to pickle for non-.npy files.
image_dir = np.load(Path+'image_dir',allow_pickle=True)
FFPE_C3_ShapMask_scaled = np.load(Path+'FFPE_C3_ShapMask_scaled',allow_pickle=True)
FFPE_C3_LIMEMask_scaled = np.load(Path+"FFPE_C3_LIMEMask_scaled.npy")
shap_values_C3_agnostic = np.load(Path+"shap_values_C3_agnostic",allow_pickle=True)
shap_segments_C3_agnostic = np.load(Path+"shap_segments_C3_agnostic",allow_pickle=True)
FFPE_GNAS_ShapMask_scaled = np.load(Path+'FFPE_GNAS_ShapMask_scaled',allow_pickle=True)
FFPE_GNAS_LIMEMask_scaled = np.load(Path+"FFPE_GNAS_LIMEMask_scaled.npy")
shap_values_GNAS_agnostic = np.load(Path+"shap_values_GNAS_agnostic",allow_pickle=True)
shap_segments_GNAS_agnostic = np.load(Path+"shap_segments_GNAS_agnostic",allow_pickle=True)
# Figure: the raw image tiles in a 5x5 grid.
plt.figure(figsize=(10,10))
for num, x in enumerate(image_dir):
    img = Image.open(x)
    plt.subplot(5,5,num+1)
    plt.axis('off')
    plt.imshow(img)
# GNAS LIME masks overlaid on the tiles.  The figure is produced twice —
# presumably duplicated notebook cells; kept as in the original.
plt.figure(figsize=(10,10))
for i in range(len(FFPE_GNAS_LIMEMask_scaled)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(FFPE_GNAS_LIMEMask_scaled[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
plt.figure(figsize=(10,10))
for i in range(len(FFPE_GNAS_LIMEMask_scaled)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(FFPE_GNAS_LIMEMask_scaled[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
# C3 LIME masks overlaid on the tiles (also plotted twice).
plt.figure(figsize=(10,10))
for i in range(len(FFPE_C3_LIMEMask_scaled)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(FFPE_C3_LIMEMask_scaled[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
plt.figure(figsize=(10,10))
for i in range(len(FFPE_C3_LIMEMask_scaled)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(FFPE_C3_LIMEMask_scaled[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
# Rebuild per-pixel score maps from the agnostic Kernel-SHAP output.
# CONSISTENCY FIX: the original inlined two verbatim copies of the loop
# already implemented by shap_segments_scores() above; call the helper
# instead.  The [:25] slice reproduces the original's fixed range(0, 25).
shap_segments_scores_GNAS_agnostic = shap_segments_scores(
    shap_segments_GNAS_agnostic[:25], shap_values_GNAS_agnostic)
shap_segments_scores_C3_agnostic = shap_segments_scores(
    shap_segments_C3_agnostic[:25], shap_values_C3_agnostic)
# Agnostic SHAP score maps for C3, shown as (255 - score) so the colour
# scale is flipped, overlaid on the tiles.  Each figure is produced twice —
# presumably duplicated notebook cells; kept as in the original.
plt.figure(figsize=(10,10))
for i in range(len(shap_segments_scores_C3_agnostic)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(255-shap_segments_scores_C3_agnostic[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
plt.figure(figsize=(10,10))
for i in range(len(shap_segments_scores_C3_agnostic)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(255-shap_segments_scores_C3_agnostic[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
# Same for GNAS.
plt.figure(figsize=(10,10))
for i in range(len(shap_segments_scores_GNAS_agnostic)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(255-shap_segments_scores_GNAS_agnostic[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
plt.figure(figsize=(10,10))
for i in range(len(shap_segments_scores_GNAS_agnostic)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(255-shap_segments_scores_GNAS_agnostic[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
# Min-max scale the agnostic SHAP score maps across all 25 tiles.
shap_segments_scores_GNAS_agnostic_scaled = scale_mask(np.array(shap_segments_scores_GNAS_agnostic),25)
shap_segments_scores_C3_agnostic_scaled = scale_mask(np.array(shap_segments_scores_C3_agnostic),25)
# Winner-take-all split of the LIME masks: per pixel, keep the GNAS value
# where GNAS > C3 (mask) and the C3 value elsewhere (mask_r = complement).
FFPE_GNAS_LIMEMask_scaled_transformed = []; FFPE_C3_LIMEMask_scaled_transformed = [];
for i in range(0,len(FFPE_GNAS_LIMEMask_scaled)):
    mask = np.greater(FFPE_GNAS_LIMEMask_scaled[i], FFPE_C3_LIMEMask_scaled[i])
    mask_r = 1-mask
    FFPE_GNAS_LIMEMask_scaled_transformed.append(mask*FFPE_GNAS_LIMEMask_scaled[i])
    FFPE_C3_LIMEMask_scaled_transformed.append(mask_r*FFPE_C3_LIMEMask_scaled[i])
# Same winner-take-all split for the global SHAP masks.
FFPE_GNAS_ShapMask_scaled_transformed = []; FFPE_C3_ShapMask_scaled_transformed = [];
for i in range(0,len(FFPE_GNAS_ShapMask_scaled)):
    mask = np.greater(FFPE_GNAS_ShapMask_scaled[i], FFPE_C3_ShapMask_scaled[i])
    mask_r = 1-mask
    FFPE_GNAS_ShapMask_scaled_transformed.append(mask*FFPE_GNAS_ShapMask_scaled[i])
    FFPE_C3_ShapMask_scaled_transformed.append(mask_r*FFPE_C3_ShapMask_scaled[i])
# And for the scaled agnostic SHAP score maps.
shap_segments_scores_GNAS_agnostic_scaled_transformed = []; shap_segments_scores_C3_agnostic_scaled_transformed = [];
for i in range(0,len(shap_segments_scores_GNAS_agnostic_scaled)):
    mask = np.greater(shap_segments_scores_GNAS_agnostic_scaled[i], shap_segments_scores_C3_agnostic_scaled[i])
    mask_r = 1-mask
    shap_segments_scores_GNAS_agnostic_scaled_transformed.append(mask*shap_segments_scores_GNAS_agnostic_scaled[i])
    shap_segments_scores_C3_agnostic_scaled_transformed.append(mask_r*shap_segments_scores_C3_agnostic_scaled[i])
# C3 winner-take-all LIME masks overlaid on the tiles (each figure is
# produced twice — presumably duplicated notebook cells; kept as-is).
plt.figure(figsize=(10,10))
for i in range(len(FFPE_C3_LIMEMask_scaled_transformed)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(FFPE_C3_LIMEMask_scaled_transformed[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
plt.figure(figsize=(10,10))
for i in range(len(FFPE_C3_LIMEMask_scaled_transformed)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(FFPE_C3_LIMEMask_scaled_transformed[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
# C3 winner-take-all agnostic SHAP maps overlaid on the tiles (twice).
plt.figure(figsize=(10,10))
for i in range(len(shap_segments_scores_C3_agnostic_scaled_transformed)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(shap_segments_scores_C3_agnostic_scaled_transformed[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
plt.figure(figsize=(10,10))
for i in range(len(shap_segments_scores_C3_agnostic_scaled_transformed)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(shap_segments_scores_C3_agnostic_scaled_transformed[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
# NOTE(review): duplicate redefinition — this is token-for-token identical
# to the LIME_coding_cell_type defined earlier in this file (a notebook
# artefact); Python simply rebinds the name.
def LIME_coding_cell_type(FFPE_C3_LIMEMask_scaled_transformed):
    """Colour-code LIME masks for cell-type visualisation.

    Stacks each mask with two constant-255 planes, casts to uint8 (values
    wrap modulo 256), recolours near-black pixels green, and recolours the
    most frequent remaining colour (assumed background) white.

    Returns a list of (299, 299, 3) uint8 RGB arrays.
    """
    FFPE_C3vGNAS_LIMEMask_scaled_transformed = []
    # Constant 255 planes used as the G and B channels.
    a = np.array([[255]*299]*299)
    b = np.array([[255]*299]*299)
    for i in range(0,len(FFPE_C3_LIMEMask_scaled_transformed)):
        # NOTE(review): *255 before the uint8 cast wraps modulo 256 for the
        # constant planes — confirm the overflow is intentional.
        rgb_uint8 = (np.dstack((FFPE_C3_LIMEMask_scaled_transformed[i],
                                a,b))*255).astype(np.uint8)
        # Near-black pixels (all channels < 15) -> pure green.
        black_pixels = np.where(
            (rgb_uint8[:, :, 0] < 15) &
            (rgb_uint8[:, :, 1] < 15) &
            (rgb_uint8[:, :, 2] < 15)
        )
        rgb_uint8[black_pixels] = [0, 255, 0]
        rgb_uint8_im = Image.fromarray(rgb_uint8)
        # Most frequent colour (count, colour pairs from getcolors) is
        # treated as the background.
        rgb_uint8_im_info = max(rgb_uint8_im.getcolors(rgb_uint8_im.size[0]*rgb_uint8_im.size[1]))
        bcg_pixels = np.where(
            (rgb_uint8[:, :, 0] == rgb_uint8_im_info[1][0]) &
            (rgb_uint8[:, :, 1] == rgb_uint8_im_info[1][1]) &
            (rgb_uint8[:, :, 2] == rgb_uint8_im_info[1][2])
        )
        rgb_uint8[bcg_pixels] = [255, 255, 255]
        FFPE_C3vGNAS_LIMEMask_scaled_transformed.append(rgb_uint8)
    return FFPE_C3vGNAS_LIMEMask_scaled_transformed
# Colour-code the C3-vs-GNAS LIME masks and overlay them on the tiles.
# The compute + plot pair appears twice in the original; kept as-is.
FFPE_C3vGNAS_LIMEMask_scaled_transformed = LIME_coding_cell_type(FFPE_C3_LIMEMask_scaled_transformed)
plt.figure(figsize=(10,10))
for i in range(len(FFPE_C3vGNAS_LIMEMask_scaled_transformed)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(FFPE_C3vGNAS_LIMEMask_scaled_transformed[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
FFPE_C3vGNAS_LIMEMask_scaled_transformed = LIME_coding_cell_type(FFPE_C3_LIMEMask_scaled_transformed)
plt.figure(figsize=(10,10))
for i in range(len(FFPE_C3vGNAS_LIMEMask_scaled_transformed)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(FFPE_C3vGNAS_LIMEMask_scaled_transformed[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
# NOTE(review): duplicate redefinition — token-for-token identical to the
# SHAP_Agnostic_coding_cell_type defined earlier in this file (a notebook
# artefact); Python simply rebinds the name.
def SHAP_Agnostic_coding_cell_type(shap_segments_scores_C3_agnostic_scaled_transformed):
    """Colour-code agnostic Kernel-SHAP score maps for cell-type display.

    Quantises each map (scores >= mean of non-zeros -> max, other positive
    scores -> 1, zeros -> mean, max -> 0), stacks to RGB, recolours
    near-black pixels green, and recolours the most frequent colour
    (assumed background) white.

    Returns a list of (299, 299, 3) uint8 RGB arrays.
    """
    shap_segments_scores_C3vGNAS_agnostic_scaled_transformed = []
    for i in range(0,len(shap_segments_scores_C3_agnostic_scaled_transformed)):
        temp_arr = shap_segments_scores_C3_agnostic_scaled_transformed[i].copy()
        # Largest non-zero score, used below as a temporary sentinel.
        first_max = temp_arr[np.nonzero(temp_arr)].max()
        temp_arr[temp_arr >= temp_arr[np.nonzero(temp_arr)].mean()] = first_max
        temp_arr[np.where(np.logical_and(temp_arr>0,temp_arr<temp_arr.max()))] = 1
        temp_arr[temp_arr == 0] = temp_arr.mean()
        temp_arr[temp_arr == first_max] = 0
        # Constant 255 planes as the G and B channels.
        a = np.array([[255]*299]*299)
        b = np.array([[255]*299]*299)
        # NOTE(review): *255 before the uint8 cast wraps modulo 256 for the
        # constant planes — confirm the overflow is intentional.
        rgb_uint8 = (np.dstack((temp_arr,a,b))*255).astype(np.uint8)
        # Near-black pixels (all channels < 15) -> pure green.
        black_pixels = np.where(
            (rgb_uint8[:, :, 0] < 15) &
            (rgb_uint8[:, :, 1] < 15) &
            (rgb_uint8[:, :, 2] < 15)
        )
        rgb_uint8[black_pixels] = [0, 255, 0]
        rgb_uint8_im = Image.fromarray(rgb_uint8)
        # Most frequent colour (count, colour pairs from getcolors) is
        # treated as the background.
        rgb_uint8_im_info = max(rgb_uint8_im.getcolors(rgb_uint8_im.size[0]*rgb_uint8_im.size[1]))
        bcg_pixels = np.where(
            (rgb_uint8[:, :, 0] == rgb_uint8_im_info[1][0]) &
            (rgb_uint8[:, :, 1] == rgb_uint8_im_info[1][1]) &
            (rgb_uint8[:, :, 2] == rgb_uint8_im_info[1][2])
        )
        rgb_uint8[bcg_pixels] = [255, 255, 255]
        shap_segments_scores_C3vGNAS_agnostic_scaled_transformed.append(rgb_uint8)
    return shap_segments_scores_C3vGNAS_agnostic_scaled_transformed
# Colour-code the C3-vs-GNAS agnostic SHAP maps and overlay them on the
# tiles.  The compute + plot pair appears twice in the original; kept as-is.
shap_segments_scores_C3vGNAS_agnostic_scaled_transformed = SHAP_Agnostic_coding_cell_type(shap_segments_scores_C3_agnostic_scaled_transformed)
plt.figure(figsize=(10,10))
for i in range(len(shap_segments_scores_C3vGNAS_agnostic_scaled_transformed)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(shap_segments_scores_C3vGNAS_agnostic_scaled_transformed[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)
shap_segments_scores_C3vGNAS_agnostic_scaled_transformed = SHAP_Agnostic_coding_cell_type(shap_segments_scores_C3_agnostic_scaled_transformed)
plt.figure(figsize=(10,10))
for i in range(len(shap_segments_scores_C3vGNAS_agnostic_scaled_transformed)):
    plt.subplot(5,5,i+1)
    plt.axis('off')
    plt.imshow(shap_segments_scores_C3vGNAS_agnostic_scaled_transformed[i])
    plt.imshow(Image.open(image_dir[i]),alpha=0.5)